- Nginx 搭建RTMP服务器
- android端代码以及步骤
(原文此处为代码清单的行号栏,网页提取时与代码本身分离,已省略;完整代码见下文。)
static int publish_file_stream(const char *input, const char *output) {
//注意这里要写=NULL 不然会崩溃
AVFormatContext *pInAvFormatContext = NULL;
AVFormatContext *pOutAvFormatContext = NULL;
AVOutputFormat *pAvOutputFormat = NULL;
int videoIndex = 0;
//1. 注册
av_register_all();
LOGE("%s", output);
//2. 获取输入的文件信息
//打开文件
if (avformat_open_input(&pInAvFormatContext, input, NULL, NULL) != 0) {
LOGE("打开文件失败!");
return -1;
}
//获取流信息
if (avformat_find_stream_info(pInAvFormatContext, NULL) < 0) {
LOGE("获取文件流失败!");
return -1;
}
//找到video的对应位置
int i = 0;
for (; i < pInAvFormatContext->nb_streams; i++) {
if (pInAvFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
videoIndex = i;
break;
}
}
//打印信息
// av_dump_format(pAvFormatContext,0,input,0);
//3. 输出设置
//初始化输出AVFormatContext获取 AVOutputFormat进行设置
avformat_alloc_output_context2(&pOutAvFormatContext, NULL, "flv", output);
if (!pOutAvFormatContext) {
LOGE("初始化输出AVFormatContext失败");
return -1;
}
pAvOutputFormat = pOutAvFormatContext->oformat;
i = 0;
for (; i < pInAvFormatContext->nb_streams; i++) {
AVStream *in_stream = pInAvFormatContext->streams[i];
AVStream *out_stream = avformat_new_stream(pOutAvFormatContext, in_stream->codec->codec);
if (!out_stream) {
LOGE("初始化out_stream失败");
return -1;
}
//复制AVCodecContext的设置(Copy the settings of AVCodecContext)
if (avcodec_copy_context(out_stream->codec, in_stream->codec) != 0) {
LOGE("copy AVCodecContext设置失败");
return -1;
}
out_stream->codec->codec_tag = 0;
if (pOutAvFormatContext->oformat->flags & AVFMT_GLOBALHEADER)
out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
//打印
// av_dump_format(pOutAvFormatContext,0,output,0);
//打开输出文件/url
if (!(pAvOutputFormat->flags & AVFMT_NOFILE)) {
int ret = avio_open(&pOutAvFormatContext->pb, output, AVIO_FLAG_WRITE);
if (ret < 0) {
LOGE("打开输出文件或者url失败 %d", ret);
return -1;
}
}
//4.写入数据
//写文件头
if (avformat_write_header(pOutAvFormatContext, NULL) != 0) {
LOGE("写入头数据失败");
return -1;
}
int64_t start_time = av_gettime();
AVPacket pkt;
int frameIndex = 0;
//写入数据源
while (av_read_frame(pInAvFormatContext, &pkt) >= 0) {
AVStream *in_stream, *out_stream;
if (pkt.pts == AV_NOPTS_VALUE) {
//Write PTS
AVRational time_base1 = pInAvFormatContext->streams[videoIndex]->time_base;
//Duration between 2 frames (us)
int64_t calc_duration = (double) AV_TIME_BASE /
av_q2d(pInAvFormatContext->streams[videoIndex]->r_frame_rate);
//Parameters
pkt.pts = (double) (frameIndex * calc_duration) /
(double) (av_q2d(time_base1) * AV_TIME_BASE);
pkt.dts = pkt.pts;
pkt.duration = (double) calc_duration / (double) (av_q2d(time_base1) * AV_TIME_BASE);
}
//Important:Delay
if (pkt.stream_index == videoIndex) {
AVRational time_base = pInAvFormatContext->streams[videoIndex]->time_base;
AVRational time_base_q = {1, AV_TIME_BASE};
int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
int64_t now_time = av_gettime() - start_time;
if (pts_time > now_time)
av_usleep(pts_time - now_time);
}
in_stream = pInAvFormatContext->streams[pkt.stream_index];
out_stream = pOutAvFormatContext->streams[pkt.stream_index];
/* copy packet */
//转换PTS/DTS(Convert PTS/DTS)
pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base,
(AVRounding) (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base,
(AVRounding) (AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
pkt.pos = -1;
//Print to Screen
if (pkt.stream_index == videoIndex) {
LOGE("Send %8d video frames to output URL\n", frameIndex);
frameIndex++;
}
if (av_interleaved_write_frame(pOutAvFormatContext, &pkt) < 0) {
LOGE("Error muxing packet\n");
break;
}
av_packet_unref(&pkt);
}
//写文件尾(Write file trailer)
av_write_trailer(pOutAvFormatContext);
avformat_close_input(&pInAvFormatContext);
/* close output */
if (pOutAvFormatContext && !(pAvOutputFormat->flags & AVFMT_NOFILE))
avio_close(pOutAvFormatContext->pb);
avformat_free_context(pOutAvFormatContext);
return 0;
}
/**
 * JNI bridge for VideoUtils.publish_file_stream(String input, String output).
 *
 * Converts both jstrings to UTF-8 C strings, delegates the actual work to
 * publish_file_stream(), then releases the JNI string buffers and returns
 * the native result code (0 on success, -1 on failure).
 */
JNIEXPORT jint JNICALL
Java_zzw_com_ffmpegdemo_VideoUtils_publish_1file_1stream(JNIEnv *env, jclass type, jstring input_,
                                                         jstring output_) {
    //input: /storage/emulated/0/aaaaa/dst.mp4
    //output: rtmp://192.168.18.231:8082/live/room
    const char *in_path = env->GetStringUTFChars(input_, 0);
    const char *out_url = env->GetStringUTFChars(output_, 0);

    jint result = publish_file_stream(in_path, out_url);

    env->ReleaseStringUTFChars(input_, in_path);
    env->ReleaseStringUTFChars(output_, out_url);
    return result;
}
参考链接:
https://blog.csdn.net/leixiaohua1020/article/details/39803457